From 5eaa4a504f865e73c0e480fb95113e67f9310ffa Mon Sep 17 00:00:00 2001
From: Jean-Marc Valin <jmvalin@amazon.com>
Date: Fri, 28 Jul 2023 19:13:00 -0400
Subject: [PATCH] Add Gated Linear Unit (GLU)

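The gated activation computes an element-wise product between a sigmoid
gate (the input passed through a linear layer and a sigmoid) and the
input passed through the selected activation:

    out[i] = sigmoid(layer(in))[i] * activation(in)[i]

The layer is required to have nb_inputs == nb_outputs.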
---
 dnn/nnet.c | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/dnn/nnet.c b/dnn/nnet.c
index a01f3726c..1c0035d08 100644
--- a/dnn/nnet.c
+++ b/dnn/nnet.c
@@ -142,6 +142,22 @@ void compute_generic_gru(const LinearLayer *input_weights, const LinearLayer *re
      state[i] = h[i];
 }
 
+/* Gated Linear Unit (GLU): out[i] = sigmoid(layer(in))[i] * activation(in)[i]. */
+void compute_gated_activation(const LinearLayer *layer, float *output, const float *input, int activation)
+{
+   int i;
+   float act1[MAX_INPUTS];
+   celt_assert(layer->nb_inputs == layer->nb_outputs);
+   /* Gate path: linear layer followed by a sigmoid, computed into output
+      (so output must not alias input, which is read again below). */
+   compute_linear(layer, output, input);
+   compute_activation(output, output, layer->nb_outputs, ACTIVATION_SIGMOID);
+   /* Signal path: the input passed through the requested activation. */
+   compute_activation(act1, input, layer->nb_outputs, activation);
+   /* Gate the activated input element-wise. */
+   for (i=0;i<layer->nb_outputs;i++) output[i] *= act1[i];
+}
+
 void compute_activation(float *output, const float *input, int N, int activation)
 {
    int i;
-- 
GitLab
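
Usage sketch (illustrative, not part of the patch): the snippet below shows
how the new function might be called from other DNN code. It assumes that a
prototype for compute_gated_activation is visible to the caller (e.g. added
to nnet.h), that the LinearLayer passed in has already been initialized by
the existing model-loading code, and that ACTIVATION_TANH is one of the
activation identifiers defined alongside ACTIVATION_SIGMOID.

    #include "nnet.h"

    /* Hypothetical helper: applies a GLU to a feature vector of size N.
       The layer must satisfy nb_inputs == nb_outputs == N, and gated must
       not alias features (output is written before input is fully read). */
    static void apply_glu(const LinearLayer *glu_layer, float *gated,
                          const float *features, int N)
    {
       celt_assert(glu_layer->nb_inputs == N && glu_layer->nb_outputs == N);
       /* gated[i] = sigmoid(glu_layer(features))[i] * tanh(features[i]) */
       compute_gated_activation(glu_layer, gated, features, ACTIVATION_TANH);
    }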