diff --git a/examples/finetune/convert-finetune-checkpoint-to-gguf.py b/examples/finetune/convert-finetune-checkpoint-to-gguf.py
index 4b9fa7c53..96d6633ed 100644
--- a/examples/finetune/convert-finetune-checkpoint-to-gguf.py
+++ b/examples/finetune/convert-finetune-checkpoint-to-gguf.py
@@ -131,7 +131,7 @@ class OptimizationContext:
     def load(self, data, offset):
         self.version = struct.unpack('<I', bytes(data[offset:offset + 4]))[0]
diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp
--- a/examples/finetune/finetune.cpp
+++ b/examples/finetune/finetune.cpp
     std::vector<char> keybuf;
@@ -1525,7 +1525,7 @@ void load_llama_lora_gguf(struct gguf_context * fctx, struct ggml_context * f_gg
     read_tensor_by_name(lora->norm_b, f_ggml_ctx, ggml_get_name(lora->norm_b));
     read_tensor_by_name(lora->output_a, f_ggml_ctx, ggml_get_name(lora->output_a));
     read_tensor_by_name(lora->output_b, f_ggml_ctx, ggml_get_name(lora->output_b));
-    
+
     for (uint32_t i = 0; i < lora->layers.size(); ++i) {
         auto & layer = lora->layers[i];
         read_tensor_by_name(layer.attention_norm_a, f_ggml_ctx, ggml_get_name(layer.attention_norm_a));
@@ -1565,7 +1565,7 @@ void save_llama_lora_gguf(struct gguf_context * fctx, struct my_llama_model * mo
 
     gguf_set_val_u32(fctx, kv(LLM_KV_CONTEXT_LENGTH), model->hparams.n_ctx);
     gguf_set_val_u32(fctx, kv(LLM_KV_EMBEDDING_LENGTH), model->hparams.n_embd);
-    gguf_set_val_u32(fctx, kv(LLM_KV_FEED_FORWARD_LENGTH), model->hparams.n_ff); 
+    gguf_set_val_u32(fctx, kv(LLM_KV_FEED_FORWARD_LENGTH), model->hparams.n_ff);
     gguf_set_val_u32(fctx, kv(LLM_KV_ATTENTION_HEAD_COUNT), model->hparams.n_head);
     gguf_set_val_u32(fctx, kv(LLM_KV_BLOCK_COUNT), model->hparams.n_layer);
     gguf_set_val_u32(fctx, kv(LLM_KV_ROPE_DIMENSION_COUNT), model->hparams.n_rot);
diff --git a/ggml.c b/ggml.c
index 681891c8c..49aff6243 100644
--- a/ggml.c
+++ b/ggml.c
@@ -16497,7 +16497,7 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor
                 if (src0->grad) {
                     src0->grad =
                         ggml_add_or_set(ctx, src0->grad,
-                            // last ggml_get_rows_back argument src0->grad is only 
+                            // last ggml_get_rows_back argument src0->grad is only
                             // necessary to setup correct output shape
                             ggml_get_rows_back(ctx, tensor->grad, src1, src0->grad),
                             zero_table);
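Aside (not part of the patch): the only substantive context in the ggml.c hunk is the comment noting that the last `ggml_get_rows_back` argument (`src0->grad`) is passed only to set up the output shape. Below is a minimal standalone sketch of that forward/backward pairing, assuming the `ggml.h` API from around this commit; the tensor sizes (an 8 x 100 matrix, 4 row indices) are made-up values for illustration, not anything from the diff.

```c
#include "ggml.h"

int main(void) {
    struct ggml_init_params params = {
        /*.mem_size   =*/ 16*1024*1024,
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ false,
    };
    struct ggml_context * ctx = ggml_init(params);

    // forward: ggml_get_rows gathers the rows of src0 selected by the
    // I32 indices in src1 (hypothetical sizes: 8 x 100 matrix, 4 indices)
    struct ggml_tensor * src0 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, 8, 100);
    struct ggml_tensor * src1 = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, 4);
    struct ggml_tensor * rows = ggml_get_rows(ctx, src0, src1); // 8 x 4

    // backward: the gradient w.r.t. src0 scatter-adds grad(rows) back into
    // a tensor shaped like src0; the third argument is used only to give
    // the op that output shape, which is what the comment touched by the
    // hunk points out (`rows` stands in for the incoming gradient here)
    struct ggml_tensor * grad_src0 = ggml_get_rows_back(ctx, rows, src1, src0);

    GGML_ASSERT(grad_src0->ne[0] == src0->ne[0]);
    GGML_ASSERT(grad_src0->ne[1] == src0->ne[1]);

    ggml_free(ctx);
    return 0;
}
```

The shape-only argument exists because ggml creates result tensors at graph-build time, before any data flows; passing `src0->grad` is simply a convenient tensor with the right dimensions.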