llama : resolve rwkv conflict
ggml-ci
commit 918885697e
parent e665b57fa2
1 changed file with 3 additions and 9 deletions
@@ -7076,19 +7076,13 @@ struct llm_build_context {

                // 1
                // );

                // struct ggml_tensor * last_norm_att = ggml_view_3d(ctx0, x_norm_att, n_embd, 1, n_seqs, x_norm_att->nb[1], x_norm_att->nb[2], (n_seq_tokens-1)*n_embd*ggml_element_size(x_norm_att));
                // ggml_build_forward_expand(
                //     gf,
                //     ggml_cpy(
                //         ctx0,
                //         wkv_states,
                //         ggml_view_1d(
                //             ctx0,
                //             kv_self.v_l[il],
                //             hparams.n_embd_v_s() * n_seqs,
                //             hparams.n_embd_v_s() * kv_head * ggml_element_size(kv_self.v_l[il])
                //         )
                //     )
                // );
                // ggml_view_1d(ctx0, last_norm_att, n_embd * n_seqs, 0),
                // ggml_view_1d(ctx0, kv_self.k_l[il], hparams.n_embd_k_s() * n_seqs, hparams.n_embd_k_s() * kv_head * ggml_element_size(kv_self.k_l[il]))

                // struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, build_rwkv6_time_mix(layer, x_norm_att, x_prev, &wkv_states, hparams.wkv_head_size, hparams.n_head_kv()));
                // ggml_build_forward_expand(gf, ffn_inp);
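For context, the commented-out block being cleaned up here follows the usual write-back pattern for a recurrent (RWKV) state: the per-layer cache tensor kv_self.v_l[il] is one flat buffer, and the states produced for the current batch are copied into it via a 1-d view whose offset is determined by kv_head. Below is a minimal, self-contained sketch of that view-and-copy arithmetic in plain C++ (no ggml); the names n_embd_v_s, n_seqs, kv_head and the buffer sizes are illustrative, not the actual llama.cpp structures.

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    int main() {
        const size_t n_embd_v_s  = 4;  // per-sequence recurrent state size (illustrative)
        const size_t n_seqs      = 2;  // sequences in the current batch
        const size_t kv_head     = 3;  // first slot used by this batch
        const size_t cache_slots = 8;  // total slots in v_l for this layer

        std::vector<float> v_l(cache_slots * n_embd_v_s, 0.0f);    // stands in for kv_self.v_l[il]
        std::vector<float> wkv_states(n_seqs * n_embd_v_s, 1.0f);  // states produced by the time-mix step

        // Plain-memory equivalent of:
        //   ggml_cpy(ctx0, wkv_states,
        //            ggml_view_1d(ctx0, kv_self.v_l[il], n_embd_v_s * n_seqs,
        //                         n_embd_v_s * kv_head * element_size));
        float * dst = v_l.data() + n_embd_v_s * kv_head;
        std::copy(wkv_states.begin(), wkv_states.end(), dst);

        printf("wrote %zu floats at slot offset %zu\n", wkv_states.size(), n_embd_v_s * kv_head);
        return 0;
    }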