src/llama-graph.cpp (22 additions, 0 deletions)

@@ -659,6 +659,28 @@ ggml_tensor * llm_graph_context::build_ffn(
                 cur = ggml_mul(ctx0, x0, x1);
                 cb(cur, "ffn_mul", il);
             } break;
+        case LLM_FFN_GEGLU:
+            {
+                // Split into two equal parts
+                int64_t split_point = cur->ne[0] / 2;
+                ggml_tensor * output_ffn_up = ggml_cont(ctx0, ggml_view_2d(
+                    ctx0, cur, split_point,
+                    cur->ne[1], cur->nb[1], 0
+                ));
+                ggml_tensor * output_ffn_gate = ggml_cont(ctx0, ggml_view_2d(
+                    ctx0, cur, split_point,
+                    cur->ne[1], cur->nb[1],
+                    split_point * ggml_element_size(cur)
+                ));
+
+                // Apply GELU activation function to the first part
+                output_ffn_up = ggml_gelu(ctx0, output_ffn_up);
+                cb(output_ffn_up, "ffn_gelu", il);
+
+                // Element-wise multiplication between the activated part and the gate part
+                cur = ggml_mul(ctx0, output_ffn_up, output_ffn_gate);
+                cb(cur, "ffn_geglu", il);
+            } break;
     }

     if (gate && type_gate == LLM_FFN_PAR) {
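For reference, the new branch implements GEGLU: the single fused up projection of width 2*n is split in half along ne[0], the first half is passed through GELU, and the result gates the second half element-wise. Below is a minimal standalone sketch of those semantics, independent of ggml; the tanh-approximation gelu helper is assumed to match ggml_gelu closely, not bit-exactly.

#include <cmath>
#include <cstddef>
#include <vector>

// GELU via the tanh approximation (ggml_gelu also uses a tanh-based
// approximation; exact agreement is an assumption of this sketch)
static float gelu(float x) {
    const float k = 0.7978845608028654f; // sqrt(2/pi)
    return 0.5f * x * (1.0f + std::tanh(k * (x + 0.044715f * x * x * x)));
}

// One row of fused GEGLU: `in` holds the up and gate halves back to back
// (size 2*n, matching the split at ne[0]/2 above); returns gelu(up) * gate
static std::vector<float> geglu_row(const std::vector<float> & in) {
    const size_t n = in.size() / 2;
    std::vector<float> out(n);
    for (size_t i = 0; i < n; ++i) {
        out[i] = gelu(in[i]) * in[i + n];
    }
    return out;
}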
src/llama-graph.h (1 addition, 0 deletions)

@@ -36,6 +36,7 @@ enum llm_ffn_op_type {
     LLM_FFN_RELU,
     LLM_FFN_RELU_SQR,
     LLM_FFN_SWIGLU,
+    LLM_FFN_GEGLU,
 };

 enum llm_ffn_gate_type {
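A hypothetical call site in a model's graph-build code is sketched below; the layer tensor names and the build_ffn argument order are assumptions based on the existing convention, not part of this diff. Note that no separate gate tensor is passed, since with LLM_FFN_GEGLU both halves live in the fused up projection.

// gate weights are NULL: the up projection already contains both halves,
// and build_ffn splits it internally in the LLM_FFN_GEGLU branch
cur = build_ffn(cur,
        model.layers[il].ffn_up,   NULL, NULL, // up
        NULL,                      NULL, NULL, // gate (fused into ffn_up)
        model.layers[il].ffn_down, NULL, NULL, // down
        NULL,
        LLM_FFN_GEGLU, LLM_FFN_SEQ, il);
cb(cur, "ffn_out", il);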